library(rstan)
library(ggmcmc)
library(bayesplot)
library(bridgesampling)
# Load the preprocessed data frame `data_for_stan` from disk
load("data_for_stan.RData")

# Assemble the data list in the format expected by the Stan models
list_for_stan <- list(
  nY = nrow(data_for_stan),                                   # total number of observations
  nS = max(data_for_stan$participant),                        # number of subjects (max participant index)
  Subj = data_for_stan$participant,                           # subject index for each observation
  size_diff = data_for_stan$size,                             # stimulus size difference predictor
  num_of_trials = data_for_stan$number_of_fixational_trials,  # trial counts per observation
  Y = data_for_stan$answer.keys                               # response variable
)

# Проверка гипотезы о связи типа иллюзии и разницы между стимулами (больше разница - больше вероятность контрастной иллюзии)

# Compile the Stan models corresponding to the two competing hypotheses
stanmodelH0 <- stan_model(file = "H0_1.stan", model_name = "H0")
stanmodelH1 <- stan_model(file = "H1_1.stan", model_name = "H1")

# Draw posterior samples under each hypothesis (model); a very large number of
# draws is generated because bridge sampling needs many samples for a stable
# marginal-likelihood estimate
fit_H0 <- sampling(object = stanmodelH0, data = list_for_stan, iter = 20000, warmup = 1000)
## 
## SAMPLING FOR MODEL 'H0' NOW (CHAIN 1).
## Chain 1: 
## Chain 1: Gradient evaluation took 8e-05 seconds
## Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.8 seconds.
## Chain 1: Adjust your expectations accordingly!
## Chain 1: 
## Chain 1: 
## Chain 1: Iteration:     1 / 20000 [  0%]  (Warmup)
## Chain 1: Iteration:  1001 / 20000 [  5%]  (Sampling)
## Chain 1: Iteration:  3000 / 20000 [ 15%]  (Sampling)
## Chain 1: Iteration:  5000 / 20000 [ 25%]  (Sampling)
## Chain 1: Iteration:  7000 / 20000 [ 35%]  (Sampling)
## Chain 1: Iteration:  9000 / 20000 [ 45%]  (Sampling)
## Chain 1: Iteration: 11000 / 20000 [ 55%]  (Sampling)
## Chain 1: Iteration: 13000 / 20000 [ 65%]  (Sampling)
## Chain 1: Iteration: 15000 / 20000 [ 75%]  (Sampling)
## Chain 1: Iteration: 17000 / 20000 [ 85%]  (Sampling)
## Chain 1: Iteration: 19000 / 20000 [ 95%]  (Sampling)
## Chain 1: Iteration: 20000 / 20000 [100%]  (Sampling)
## Chain 1: 
## Chain 1:  Elapsed Time: 0.529801 seconds (Warm-up)
## Chain 1:                9.61802 seconds (Sampling)
## Chain 1:                10.1478 seconds (Total)
## Chain 1: 
## 
## SAMPLING FOR MODEL 'H0' NOW (CHAIN 2).
## Chain 2: 
## Chain 2: Gradient evaluation took 4.5e-05 seconds
## Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.45 seconds.
## Chain 2: Adjust your expectations accordingly!
## Chain 2: 
## Chain 2: 
## Chain 2: Iteration:     1 / 20000 [  0%]  (Warmup)
## Chain 2: Iteration:  1001 / 20000 [  5%]  (Sampling)
## Chain 2: Iteration:  3000 / 20000 [ 15%]  (Sampling)
## Chain 2: Iteration:  5000 / 20000 [ 25%]  (Sampling)
## Chain 2: Iteration:  7000 / 20000 [ 35%]  (Sampling)
## Chain 2: Iteration:  9000 / 20000 [ 45%]  (Sampling)
## Chain 2: Iteration: 11000 / 20000 [ 55%]  (Sampling)
## Chain 2: Iteration: 13000 / 20000 [ 65%]  (Sampling)
## Chain 2: Iteration: 15000 / 20000 [ 75%]  (Sampling)
## Chain 2: Iteration: 17000 / 20000 [ 85%]  (Sampling)
## Chain 2: Iteration: 19000 / 20000 [ 95%]  (Sampling)
## Chain 2: Iteration: 20000 / 20000 [100%]  (Sampling)
## Chain 2: 
## Chain 2:  Elapsed Time: 0.530467 seconds (Warm-up)
## Chain 2:                10.7303 seconds (Sampling)
## Chain 2:                11.2608 seconds (Total)
## Chain 2: 
## 
## SAMPLING FOR MODEL 'H0' NOW (CHAIN 3).
## Chain 3: 
## Chain 3: Gradient evaluation took 4.7e-05 seconds
## Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.47 seconds.
## Chain 3: Adjust your expectations accordingly!
## Chain 3: 
## Chain 3: 
## Chain 3: Iteration:     1 / 20000 [  0%]  (Warmup)
## Chain 3: Iteration:  1001 / 20000 [  5%]  (Sampling)
## Chain 3: Iteration:  3000 / 20000 [ 15%]  (Sampling)
## Chain 3: Iteration:  5000 / 20000 [ 25%]  (Sampling)
## Chain 3: Iteration:  7000 / 20000 [ 35%]  (Sampling)
## Chain 3: Iteration:  9000 / 20000 [ 45%]  (Sampling)
## Chain 3: Iteration: 11000 / 20000 [ 55%]  (Sampling)
## Chain 3: Iteration: 13000 / 20000 [ 65%]  (Sampling)
## Chain 3: Iteration: 15000 / 20000 [ 75%]  (Sampling)
## Chain 3: Iteration: 17000 / 20000 [ 85%]  (Sampling)
## Chain 3: Iteration: 19000 / 20000 [ 95%]  (Sampling)
## Chain 3: Iteration: 20000 / 20000 [100%]  (Sampling)
## Chain 3: 
## Chain 3:  Elapsed Time: 0.529252 seconds (Warm-up)
## Chain 3:                10.951 seconds (Sampling)
## Chain 3:                11.4803 seconds (Total)
## Chain 3: 
## 
## SAMPLING FOR MODEL 'H0' NOW (CHAIN 4).
## Chain 4: 
## Chain 4: Gradient evaluation took 5e-05 seconds
## Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.5 seconds.
## Chain 4: Adjust your expectations accordingly!
## Chain 4: 
## Chain 4: 
## Chain 4: Iteration:     1 / 20000 [  0%]  (Warmup)
## Chain 4: Iteration:  1001 / 20000 [  5%]  (Sampling)
## Chain 4: Iteration:  3000 / 20000 [ 15%]  (Sampling)
## Chain 4: Iteration:  5000 / 20000 [ 25%]  (Sampling)
## Chain 4: Iteration:  7000 / 20000 [ 35%]  (Sampling)
## Chain 4: Iteration:  9000 / 20000 [ 45%]  (Sampling)
## Chain 4: Iteration: 11000 / 20000 [ 55%]  (Sampling)
## Chain 4: Iteration: 13000 / 20000 [ 65%]  (Sampling)
## Chain 4: Iteration: 15000 / 20000 [ 75%]  (Sampling)
## Chain 4: Iteration: 17000 / 20000 [ 85%]  (Sampling)
## Chain 4: Iteration: 19000 / 20000 [ 95%]  (Sampling)
## Chain 4: Iteration: 20000 / 20000 [100%]  (Sampling)
## Chain 4: 
## Chain 4:  Elapsed Time: 0.517536 seconds (Warm-up)
## Chain 4:                11.0881 seconds (Sampling)
## Chain 4:                11.6057 seconds (Total)
## Chain 4:
# Same sampling settings for the alternative-hypothesis model
fit_H1 <- sampling(object = stanmodelH1, data = list_for_stan, iter = 20000, warmup = 1000)
## 
## SAMPLING FOR MODEL 'H1' NOW (CHAIN 1).
## Chain 1: 
## Chain 1: Gradient evaluation took 5.2e-05 seconds
## Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.52 seconds.
## Chain 1: Adjust your expectations accordingly!
## Chain 1: 
## Chain 1: 
## Chain 1: Iteration:     1 / 20000 [  0%]  (Warmup)
## Chain 1: Iteration:  1001 / 20000 [  5%]  (Sampling)
## Chain 1: Iteration:  3000 / 20000 [ 15%]  (Sampling)
## Chain 1: Iteration:  5000 / 20000 [ 25%]  (Sampling)
## Chain 1: Iteration:  7000 / 20000 [ 35%]  (Sampling)
## Chain 1: Iteration:  9000 / 20000 [ 45%]  (Sampling)
## Chain 1: Iteration: 11000 / 20000 [ 55%]  (Sampling)
## Chain 1: Iteration: 13000 / 20000 [ 65%]  (Sampling)
## Chain 1: Iteration: 15000 / 20000 [ 75%]  (Sampling)
## Chain 1: Iteration: 17000 / 20000 [ 85%]  (Sampling)
## Chain 1: Iteration: 19000 / 20000 [ 95%]  (Sampling)
## Chain 1: Iteration: 20000 / 20000 [100%]  (Sampling)
## Chain 1: 
## Chain 1:  Elapsed Time: 0.526267 seconds (Warm-up)
## Chain 1:                10.8238 seconds (Sampling)
## Chain 1:                11.3501 seconds (Total)
## Chain 1: 
## 
## SAMPLING FOR MODEL 'H1' NOW (CHAIN 2).
## Chain 2: 
## Chain 2: Gradient evaluation took 4.6e-05 seconds
## Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.46 seconds.
## Chain 2: Adjust your expectations accordingly!
## Chain 2: 
## Chain 2: 
## Chain 2: Iteration:     1 / 20000 [  0%]  (Warmup)
## Chain 2: Iteration:  1001 / 20000 [  5%]  (Sampling)
## Chain 2: Iteration:  3000 / 20000 [ 15%]  (Sampling)
## Chain 2: Iteration:  5000 / 20000 [ 25%]  (Sampling)
## Chain 2: Iteration:  7000 / 20000 [ 35%]  (Sampling)
## Chain 2: Iteration:  9000 / 20000 [ 45%]  (Sampling)
## Chain 2: Iteration: 11000 / 20000 [ 55%]  (Sampling)
## Chain 2: Iteration: 13000 / 20000 [ 65%]  (Sampling)
## Chain 2: Iteration: 15000 / 20000 [ 75%]  (Sampling)
## Chain 2: Iteration: 17000 / 20000 [ 85%]  (Sampling)
## Chain 2: Iteration: 19000 / 20000 [ 95%]  (Sampling)
## Chain 2: Iteration: 20000 / 20000 [100%]  (Sampling)
## Chain 2: 
## Chain 2:  Elapsed Time: 0.551555 seconds (Warm-up)
## Chain 2:                11.0949 seconds (Sampling)
## Chain 2:                11.6464 seconds (Total)
## Chain 2: 
## 
## SAMPLING FOR MODEL 'H1' NOW (CHAIN 3).
## Chain 3: 
## Chain 3: Gradient evaluation took 4.6e-05 seconds
## Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.46 seconds.
## Chain 3: Adjust your expectations accordingly!
## Chain 3: 
## Chain 3: 
## Chain 3: Iteration:     1 / 20000 [  0%]  (Warmup)
## Chain 3: Iteration:  1001 / 20000 [  5%]  (Sampling)
## Chain 3: Iteration:  3000 / 20000 [ 15%]  (Sampling)
## Chain 3: Iteration:  5000 / 20000 [ 25%]  (Sampling)
## Chain 3: Iteration:  7000 / 20000 [ 35%]  (Sampling)
## Chain 3: Iteration:  9000 / 20000 [ 45%]  (Sampling)
## Chain 3: Iteration: 11000 / 20000 [ 55%]  (Sampling)
## Chain 3: Iteration: 13000 / 20000 [ 65%]  (Sampling)
## Chain 3: Iteration: 15000 / 20000 [ 75%]  (Sampling)
## Chain 3: Iteration: 17000 / 20000 [ 85%]  (Sampling)
## Chain 3: Iteration: 19000 / 20000 [ 95%]  (Sampling)
## Chain 3: Iteration: 20000 / 20000 [100%]  (Sampling)
## Chain 3: 
## Chain 3:  Elapsed Time: 0.557443 seconds (Warm-up)
## Chain 3:                11.307 seconds (Sampling)
## Chain 3:                11.8645 seconds (Total)
## Chain 3: 
## 
## SAMPLING FOR MODEL 'H1' NOW (CHAIN 4).
## Chain 4: 
## Chain 4: Gradient evaluation took 4.2e-05 seconds
## Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.42 seconds.
## Chain 4: Adjust your expectations accordingly!
## Chain 4: 
## Chain 4: 
## Chain 4: Iteration:     1 / 20000 [  0%]  (Warmup)
## Chain 4: Iteration:  1001 / 20000 [  5%]  (Sampling)
## Chain 4: Iteration:  3000 / 20000 [ 15%]  (Sampling)
## Chain 4: Iteration:  5000 / 20000 [ 25%]  (Sampling)
## Chain 4: Iteration:  7000 / 20000 [ 35%]  (Sampling)
## Chain 4: Iteration:  9000 / 20000 [ 45%]  (Sampling)
## Chain 4: Iteration: 11000 / 20000 [ 55%]  (Sampling)
## Chain 4: Iteration: 13000 / 20000 [ 65%]  (Sampling)
## Chain 4: Iteration: 15000 / 20000 [ 75%]  (Sampling)
## Chain 4: Iteration: 17000 / 20000 [ 85%]  (Sampling)
## Chain 4: Iteration: 19000 / 20000 [ 95%]  (Sampling)
## Chain 4: Iteration: 20000 / 20000 [100%]  (Sampling)
## Chain 4: 
## Chain 4:  Elapsed Time: 0.552729 seconds (Warm-up)
## Chain 4:                11.613 seconds (Sampling)
## Chain 4:                12.1657 seconds (Total)
## Chain 4:
# Posterior summary for the H0 model (check n_eff and that Rhat is ~1)
print(fit_H0)
## Inference for Stan model: H0.
## 4 chains, each with iter=20000; warmup=1000; thin=1; 
## post-warmup draws per chain=19000, total post-warmup draws=76000.
## 
##              mean se_mean   sd    2.5%     25%     50%     75%   97.5%
## intercept    0.30    0.00 0.39   -0.41    0.03    0.28    0.55    1.10
## beta_sd      1.22    0.00 0.33    0.72    0.99    1.17    1.40    1.99
## beta[1]     -0.09    0.00 0.32   -0.74   -0.31   -0.09    0.12    0.53
## beta[2]      0.42    0.00 0.49   -0.48    0.09    0.39    0.72    1.46
## beta[3]     -1.28    0.00 0.63   -2.74   -1.63   -1.20   -0.83   -0.29
## beta[4]     -1.88    0.00 0.85   -3.88   -2.32   -1.74   -1.28   -0.63
## beta[5]     -0.88    0.00 0.48   -1.94   -1.17   -0.83   -0.54   -0.06
## beta[6]      0.92    0.00 0.63   -0.11    0.48    0.84    1.28    2.36
## beta[7]      0.42    0.00 0.38   -0.29    0.16    0.41    0.66    1.22
## beta[8]      0.35    0.00 0.35   -0.32    0.10    0.33    0.57    1.09
## beta[9]      0.01    0.00 0.39   -0.75   -0.24    0.01    0.27    0.80
## beta[10]    -1.63    0.00 0.83   -3.62   -2.07   -1.50   -1.05   -0.40
## beta[11]    -1.07    0.00 0.41   -1.99   -1.32   -1.03   -0.78   -0.36
## beta[12]    -0.99    0.00 0.45   -1.97   -1.26   -0.95   -0.67   -0.20
## beta[13]    -1.16    0.00 0.88   -3.23   -1.63   -1.04   -0.55    0.22
## beta[14]    -0.71    0.00 0.37   -1.51   -0.94   -0.69   -0.46   -0.05
## beta[15]    -1.84    0.00 0.82   -3.81   -2.26   -1.71   -1.26   -0.63
## beta[16]    -0.52    0.00 0.33   -1.21   -0.73   -0.50   -0.29    0.10
## beta[17]     0.46    0.00 0.36   -0.20    0.21    0.44    0.69    1.23
## beta[18]    -0.80    0.00 0.70   -2.36   -1.20   -0.73   -0.33    0.39
## beta[19]    -1.98    0.00 0.80   -3.88   -2.40   -1.85   -1.41   -0.79
## beta[20]     0.17    0.00 0.34   -0.47   -0.06    0.16    0.39    0.86
## beta[21]    -1.15    0.00 0.84   -3.16   -1.59   -1.02   -0.56    0.13
## beta[22]    -0.88    0.00 0.39   -1.73   -1.12   -0.85   -0.60   -0.19
## beta[23]     0.63    0.00 0.36   -0.01    0.38    0.60    0.85    1.39
## beta[24]     1.05    0.00 0.49    0.22    0.70    1.00    1.34    2.16
## beta[25]     0.07    0.00 0.47   -0.85   -0.24    0.07    0.37    1.02
## beta[26]    -1.62    0.00 0.85   -3.63   -2.07   -1.49   -1.02   -0.32
## lp__      -128.33    0.04 4.80 -138.76 -131.35 -127.95 -124.91 -120.02
##           n_eff Rhat
## intercept 21284    1
## beta_sd   19044    1
## beta[1]   44734    1
## beta[2]   69192    1
## beta[3]   40548    1
## beta[4]   31713    1
## beta[5]   42251    1
## beta[6]   64130    1
## beta[7]   50712    1
## beta[8]   54525    1
## beta[9]   54264    1
## beta[10]  33796    1
## beta[11]  41271    1
## beta[12]  41058    1
## beta[13]  42345    1
## beta[14]  44790    1
## beta[15]  32982    1
## beta[16]  44559    1
## beta[17]  55574    1
## beta[18]  49312    1
## beta[19]  31470    1
## beta[20]  58536    1
## beta[21]  38604    1
## beta[22]  40576    1
## beta[23]  53063    1
## beta[24]  60305    1
## beta[25]  62868    1
## beta[26]  35946    1
## lp__      17381    1
## 
## Samples were drawn using NUTS(diag_e) at Fri Jan  4 17:30:10 2019.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at 
## convergence, Rhat=1).
# Posterior summary for the H1 model (check n_eff and that Rhat is ~1)
print(fit_H1)
## Inference for Stan model: H1.
## 4 chains, each with iter=20000; warmup=1000; thin=1; 
## post-warmup draws per chain=19000, total post-warmup draws=76000.
## 
##              mean se_mean   sd    2.5%     25%     50%     75%   97.5%
## intercept    1.14    0.00 0.45    0.27    0.84    1.14    1.45    2.05
## beta_mu     -1.04    0.00 0.36   -1.79   -1.27   -1.03   -0.80   -0.38
## beta_sd      1.15    0.00 0.28    0.71    0.95    1.11    1.31    1.81
## beta[1]     -0.52    0.00 0.35   -1.22   -0.75   -0.52   -0.29    0.15
## beta[2]     -0.10    0.00 0.49   -1.01   -0.42   -0.11    0.21    0.92
## beta[3]     -1.83    0.00 0.70   -3.44   -2.24   -1.75   -1.33   -0.69
## beta[4]     -2.51    0.01 0.90   -4.59   -3.02   -2.39   -1.87   -1.11
## beta[5]     -1.39    0.00 0.53   -2.56   -1.72   -1.34   -1.01   -0.46
## beta[6]      0.36    0.00 0.58   -0.62   -0.04    0.30    0.69    1.69
## beta[7]     -0.06    0.00 0.39   -0.78   -0.32   -0.07    0.19    0.73
## beta[8]     -0.07    0.00 0.36   -0.75   -0.32   -0.08    0.16    0.67
## beta[9]     -0.41    0.00 0.41   -1.22   -0.68   -0.41   -0.14    0.39
## beta[10]    -2.22    0.00 0.89   -4.31   -2.71   -2.10   -1.60   -0.85
## beta[11]    -1.55    0.00 0.47   -2.56   -1.83   -1.51   -1.22   -0.74
## beta[12]    -1.52    0.00 0.51   -2.63   -1.84   -1.49   -1.16   -0.62
## beta[13]    -1.77    0.00 0.94   -3.98   -2.30   -1.65   -1.11   -0.28
## beta[14]    -1.13    0.00 0.41   -2.01   -1.39   -1.10   -0.85   -0.40
## beta[15]    -2.43    0.00 0.87   -4.48   -2.92   -2.31   -1.81   -1.08
## beta[16]    -0.92    0.00 0.36   -1.68   -1.15   -0.90   -0.67   -0.25
## beta[17]     0.05    0.00 0.36   -0.62   -0.20    0.03    0.28    0.81
## beta[18]    -1.40    0.00 0.76   -3.08   -1.85   -1.32   -0.87   -0.10
## beta[19]    -2.56    0.00 0.85   -4.57   -3.03   -2.44   -1.96   -1.25
## beta[20]    -0.19    0.00 0.35   -0.86   -0.43   -0.20    0.04    0.51
## beta[21]    -1.72    0.00 0.92   -3.86   -2.23   -1.58   -1.07   -0.30
## beta[22]    -1.35    0.00 0.44   -2.31   -1.62   -1.31   -1.04   -0.56
## beta[23]     0.21    0.00 0.36   -0.43   -0.03    0.20    0.44    0.96
## beta[24]     0.55    0.00 0.47   -0.25    0.22    0.50    0.82    1.59
## beta[25]    -0.42    0.00 0.49   -1.37   -0.74   -0.42   -0.10    0.55
## beta[26]    -2.23    0.00 0.90   -4.31   -2.75   -2.12   -1.60   -0.81
## lp__      -124.19    0.04 4.87 -134.72 -127.23 -123.82 -120.70 -115.81
##           n_eff Rhat
## intercept 19217    1
## beta_mu   22272    1
## beta_sd   22099    1
## beta[1]   36334    1
## beta[2]   51937    1
## beta[3]   37464    1
## beta[4]   31306    1
## beta[5]   40163    1
## beta[6]   52185    1
## beta[7]   40257    1
## beta[8]   43306    1
## beta[9]   48985    1
## beta[10]  34207    1
## beta[11]  37284    1
## beta[12]  36047    1
## beta[13]  40344    1
## beta[14]  40891    1
## beta[15]  33453    1
## beta[16]  40506    1
## beta[17]  47035    1
## beta[18]  48444    1
## beta[19]  31439    1
## beta[20]  48931    1
## beta[21]  40631    1
## beta[22]  36549    1
## beta[23]  42963    1
## beta[24]  50616    1
## beta[25]  53176    1
## beta[26]  36680    1
## lp__      17506    1
## 
## Samples were drawn using NUTS(diag_e) at Fri Jan  4 17:30:57 2019.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at 
## convergence, Rhat=1).

# Диагностика

# Convert both fits to the long-format data frames used by ggmcmc
posterior_H0 <- ggs(fit_H0)
posterior_H1 <- ggs(fit_H1)

# Visual convergence diagnostics for the subject-level `beta` parameters:
# marginal posterior histograms
ggmcmc(D = posterior_H0, file = NULL, family = 'beta', plot = 'ggs_histogram')
## Plotting histograms

## Time taken to generate the report: 18 seconds.
ggmcmc(D = posterior_H1, file = NULL, family = 'beta', plot = 'ggs_histogram')
## Plotting histograms

## Time taken to generate the report: 14 seconds.
# partial-chain vs full-chain density comparison (stability of the posterior)
ggmcmc(D = posterior_H0, file = NULL, family = 'beta', plot = 'ggs_compare_partial')
## Plotting comparison of partial and full chain

## Time taken to generate the report: 29 seconds.
ggmcmc(D = posterior_H1, file = NULL, family = 'beta', plot = 'ggs_compare_partial')
## Plotting comparison of partial and full chain

## Time taken to generate the report: 30 seconds.
# traceplots (mixing of the chains)
ggmcmc(D = posterior_H0, file = NULL, family = 'beta', plot = 'ggs_traceplot')
## Plotting traceplots

## Time taken to generate the report: 62 seconds.
ggmcmc(D = posterior_H1, file = NULL, family = 'beta', plot = 'ggs_traceplot')
## Plotting traceplots

## Time taken to generate the report: 64 seconds.
# autocorrelation of the draws
ggmcmc(D = posterior_H0, file = NULL, family = 'beta', plot = 'ggs_autocorrelation')
## Plotting autocorrelation plots

## Time taken to generate the report: 43 seconds.
ggmcmc(D = posterior_H1, file = NULL, family = 'beta', plot = 'ggs_autocorrelation')
## Plotting autocorrelation plots

## Time taken to generate the report: 45 seconds.

# Байес-фактор

# Estimate the log marginal likelihood of the data under each hypothesis (model)
H0_res <- bridge_sampler(fit_H0, silent = TRUE)
H1_res <- bridge_sampler(fit_H1, silent = TRUE)
print(H0_res)
## Bridge sampling estimate of the log marginal likelihood: -112.1187
## Estimate obtained in 5 iteration(s) via method "normal".
print(H1_res)
## Bridge sampling estimate of the log marginal likelihood: -107.2271
## Estimate obtained in 5 iteration(s) via method "normal".
# Inspect the approximate percentage error of each marginal-likelihood estimate
error_measures(H0_res)$percentage
## [1] "0.417%"
error_measures(H1_res)$percentage
## [1] "0.400%"

# Предполагаем, что обе гипотезы одинаково вероятны (имеют одинаковую априорную вероятность)

# Bayes factor in favor of the alternative hypothesis (H1 over H0)
BF10 <- bf(H1_res, H0_res)
print(BF10)
## Estimated Bayes factor in favor of H1_res over H0_res: 133.16600
# Bayes factor in favor of the null hypothesis (H0 over H1); equals 1 / BF10
BF01 <- bf(H0_res, H1_res)
print(BF01)
## Estimated Bayes factor in favor of H0_res over H1_res: 0.00751

# Далее: пробуем разные нулевые и альтернативные гипотезы